@@ -19,7 +19,7 @@ module Agents

       `url` can be a single url, or an array of urls (for example, for multiple pages with the exact same structure but different content to scrape)

-      The WebsiteAgent can also scrape based on incoming events. It will scrape the url contained in the `url` key of the incoming event payload, or if you set `url_on_receive` it is used as a Liquid template to generate the url to access. If you specify `merge` as the `mode`, it will retain the old payload and update it with the new values.
+      The WebsiteAgent can also scrape based on incoming events. It will scrape the url contained in the `url` key of the incoming event payload, or if you set `url_from_event` it is used as a Liquid template to generate the url to access. If you specify `merge` as the `mode`, it will retain the old payload and update it with the new values.

       # Supported Document Types
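For illustration only, a minimal options hash exercising the renamed `url_from_event` setting might look like the sketch below. The Liquid template is the one used in the spec change further down in this diff; the other keys are merely indicative, based on the options referenced in `validate_options`, and the values are assumptions rather than anything taken from this change.

    # Hypothetical WebsiteAgent options (a sketch, not part of this diff).
    options = {
      'expected_update_period_in_days' => '2',
      'mode' => 'merge',  # keep the incoming payload and merge in the scraped values
      'url_from_event' => 'http://example.org/?url={{url | uri_escape}}',
      # 'extract' is required for all extraction types except json (see validate_options).
    }

With options like these, the incoming event's `url` value is interpolated into the Liquid template, so the agent requests the templated url rather than the event's own `url`.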
|
@@ -135,7 +135,7 @@ module Agents

     def validate_options
       # Check for required fields
-      errors.add(:base, "either url or url_on_receive is required") unless options['url'].present? || options['url_on_receive'].present?
+      errors.add(:base, "either url or url_from_event is required") unless options['url'].present? || options['url_from_event'].present?
       errors.add(:base, "expected_update_period_in_days is required") unless options['expected_update_period_in_days'].present?
       if !options['extract'].present? && extraction_type != "json"
         errors.add(:base, "extract is required for all types except json")
@@ -259,7 +259,7 @@ module Agents
       incoming_events.each do |event|
         interpolate_with(event) do
           url_to_scrape =
-            if url_template = options['url_on_receive'].presence
+            if url_template = options['url_from_event'].presence
               interpolate_string(url_template)
             else
               event.payload['url']
@@ -633,11 +633,11 @@ fire: hot
         }.to change { Event.count }.by(1)
       end

-      it "should use url_on_receive as url to scrape if it exists when receiving an event" do
+      it "should use url_from_event as url to scrape if it exists when receiving an event" do
        stub = stub_request(:any, 'http://example.org/?url=http%3A%2F%2Fxkcd.com')

        @checker.options = @valid_options.merge(
-         'url_on_receive' => 'http://example.org/?url={{url | uri_escape}}'
+         'url_from_event' => 'http://example.org/?url={{url | uri_escape}}'
        )
        @checker.receive([@event])